/*
* $Id: nnn.c 1.35 1995/08/29 23:50:11 daltern Exp $
*
* Program nnn
* Programmers Nicholas d'Alterio
* Gideon Denby
* Ata Etemadi (some of the i/o code)
* Date 20/02/95
*
* Synopsis: This is a neural network program which uses the
* back propagation algorithm with a sigmoid activation
* function.
* Usage: (brief)
* nnn -h for help
* nnn < conf_file > new_conf_file training
* nnn -g < conf_file > graph_file training, progress graph to stdout
* nnn -i < conf_file > new_conf_file classification
*
* Other Associated Functions:- NetFeedForward()
* NetBackProp()
* NetTransFunc()
* NetVecRand()
* NetWriteConfig()
* NetExit()
* NetAbort()
*
* $Log: nnn.c $
* Revision 1.35 1995/08/29 23:50:11 daltern
* Now exits nicely if CTRL-C aborted, memory deallocated etc.
* Done using global variables
*
* Revision 1.34 1995/08/29 23:15:00 daltern
* correct preprocessor symbol for conditional compilation
*
* Revision 1.33 1995/08/27 01:54:22 daltern
* changed way the time between updates is calculated. Also
* added a couple of extra messages to give better idea of
* progress in early stages of program.
*
* Revision 1.32 1995/08/26 22:16:11 daltern
* remembered why I didn't have previous change before ! removed change
*
* Revision 1.31 1995/08/26 22:12:54 daltern
* Added message if no arguments are given
*
* Revision 1.30 1995/08/26 22:05:43 daltern
* Added checking for CTRL-C aborts, conditional amiga code
* Added status messages for monitoring progress of training
*
* Revision 1.29 1995/07/11 20:03:47 daltern
* added ADOS version string
*
* Revision 1.28 1995/03/22 01:26:25 daltern
* Added new extended comments
*
* Revision 1.27 1995/03/22 01:04:27 daltern
* Now expects weights for each layer on separate lines
*
* Revision 1.26 1995/03/22 00:34:04 daltern
* BUGFIX: vectors not read in properly if training continued
* after previous attempt. Added extra flag to avoid problem
* Added code simplifying variables
*
* Revision 1.25 1995/03/20 23:40:54 daltern
* Added code for gain factor
* BUGFIX: bias nodes on layers above the input layer were being changed
* because they were used for temp storage. Now reinitialised after each
* backprop
*
* Revision 1.23 1995/03/17 18:13:58 projects
* Amended so that weights ending on a bias are ignored
*
* Revision 1.22 1995/03/17 17:52:00 projects
* Made large memory saving by only allocating the array elements
* that are needed rather than max size. Also improved speed of
* initialisation and removed max size finding code
*
* Revision 1.21 1995/03/16 19:28:54 daltern
* Added momentum rate to program. Reads in this parameter
* from config file or accepts it from command line after
* -m option. Changed the classifying option to -i
*
* Revision 1.20 1995/03/16 18:19:51 daltern
* adjusted biasing so that there is a bias node with random
* weights going from it to all other nodes
*
* Revision 1.17 1995/03/16 10:35:44 projects
* Fixed some bad help messages with -h option
*
* Revision 1.16 1995/03/14 23:40:14 daltern
* Removed some wasted assignments
*
* Revision 1.15 1995/03/14 22:51:14 daltern
* Fixed up biasing to enable easy changing from config file
* commented properly
*
*/
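/*
* The sigmoid activation referred to above is assumed to take the
* standard form (the actual definition lives in NetTransFunc(), with
* the other Net* support functions, not in this file):
*
*     f(x) = 1.0 / (1.0 + exp( -gain * x ))
*
* so node outputs lie in (0,1) and the gain factor controls the slope
* of the transition between the low and high states.
*/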
#include "Neural.h"
extern char *version = "$VER: nnn 1.35 (29.08.95) Nicholas d'Alterio";
/*
* Global variables to enable proper cleanup if the program
* is interrupted by CTRL-C. They should not be used for any
* other purpose.
*/
NET *netI_g;
VECTOR *vecS_g;
float **Node_g;
float ***Weight_g;
float ***Delta_weight_g;
int main( int argc, char *argv[] )
{
int i,j,k; /* Loop indices and flags */
int status;
int mode; /* FALSE = Training mode, TRUE = Classify mode */
int update;
int rateflag; /* various flags */
int seedflag;
int accuflag;
int vecflag;
int momflag;
int graphflag;
int classflag;
int accurateflag;
int gainflag;
int weightsflag;
int output_nodes; /* code simplifying variables */
int output_layer;
int class_its; /* number of iterations in training loop */
int cur_vec; /* Current vector number */
float learn_rate; /* Learning rate */
float seed; /* Seed for random number generator */
float accuracy; /* Desired output accuracy */
float mom_fac; /* momentum factor */
float usr_rate; /* User supplied command line parameters */
float usr_seed;
float usr_accuracy;
float usr_vec_cor;
float usr_mom_fac;
float usr_gain;
float vecs_correct; /* required % of vectors classified correctly */
float per_cor; /* % of vectors actually classified correctly */
float num_correct; /* number of vectors meeting accuracy condition */
float result; /* difference between target and actual output */
float gain; /* gain factor */
float **Node; /* The nodes */
float ***Weight; /* the weights between nodes */
float ***Delta_weight;
VECTOR vecS; /* The vectors to classify or train with */
NET netI; /* Information about the net sizes */
char text[MAXCHAR];
/*==========================================================================*
SET UP CTRL-C ABORT FUNCTION
*==========================================================================*/
if ( signal( SIGINT, (&NetAbort) ) == SIG_ERR ) {
fprintf( stderr, " CTRL-C breaks will not be handled correctly\n" );
} /* end if */
/*==========================================================================*
COMMAND LINE PARAMETERS
*==========================================================================*/
rateflag = FALSE;
seedflag = FALSE;
accuflag = FALSE;
vecflag = FALSE;
mode = FALSE;
graphflag = FALSE;
momflag = FALSE;
gainflag = FALSE;
for (i=1; i < argc; i++) {
if (argv[i][0] == '-') {
switch (argv[i][1]) {
case 'r': usr_rate = atof(argv[++i]); rateflag = TRUE; break;
case 's': usr_seed = atof(argv[++i]); seedflag = TRUE; break;
case 'a': usr_accuracy = atof(argv[++i]); accuflag = TRUE; break;
case 'c': usr_vec_cor = atof(argv[++i]); vecflag = TRUE; break;
case 'm': usr_mom_fac = atof(argv[++i]); momflag = TRUE; break;
case 'k': usr_gain = atof(argv[++i]); gainflag = TRUE; break;
case 'i': mode = TRUE; break;
case 'g': graphflag = TRUE; break;
case 'h':
fprintf(stderr,"Usage\n");
fprintf(stderr,"Training mode:\n");
fprintf(stderr," %s < configfile.conf > outputfile.conf \n",argv[0]);
fprintf(stderr,"Classification mode:\n");
fprintf(stderr," %s -i < configfile.conf > output \n",argv[0]);
fprintf(stderr,"These command line switches OVERRIDE values in config file:\n");
fprintf(stderr,"-r <rate> : Learning rate\n");
fprintf(stderr,"-s <seed> : Seed for random number generator\n");
fprintf(stderr,"-a <accuracy>: Desired accuracy\n");
fprintf(stderr,"-c <%%> : Desired %% of Vectors correct\n");
fprintf(stderr,"-m <mom fac> : Momentum factor in range 0-1\n");
fprintf(stderr,"-k <gain> : Gain factor\n");
fprintf(stderr,"-i : Classify input vectors in config file\n");
fprintf(stderr,"-g : Output graph to stdout instead of config file\n");
fprintf(stderr,"-h : Print this usage information\n");
exit(0);
default : fprintf(stderr,"Unrecognized option %s [Try -h for Usage]\n",argv[i]); exit(-1);
} /* end switch */
} /* end if */
} /* end for */
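/*
* Example invocation (file names purely illustrative): train with a
* learning rate of 0.25 until 90% of the vectors are within 0.1 of
* their targets, overriding whatever the config file specifies:
*
*     nnn -r 0.25 -a 0.1 -c 90 < xor.conf > xor_trained.conf
*/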
/*==========================================================================*
INITIALISATION
*==========================================================================*/
/*
* Read in various network parameters - anything which is not
* a number is skipped over.
*/
while (! fscanf (stdin, "%d", &netI.NumLayers))
fscanf (stdin, "%*[^\n]");
while (! fscanf (stdin, "%f", &learn_rate))
fscanf (stdin, "%*[^\n]");
while (! fscanf (stdin, "%f", &mom_fac))
fscanf (stdin, "%*[^\n]");
while (! fscanf (stdin, "%f", &gain))
fscanf (stdin, "%*[^\n]");
while (! fscanf (stdin, "%f", &seed))
fscanf (stdin, "%*[^\n]");
while (! fscanf (stdin, "%f", &accuracy))
fscanf (stdin, "%*[^\n]");
while (! fscanf (stdin, "%f", &vecs_correct))
fscanf (stdin, "%*[^\n]");
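/*
* The layout this implies for the config file on stdin (the delimiter
* strings themselves are presumably defined as DELIMITER[] in Neural.h):
*
*     - the seven numeric parameters above, in that order; lines not
*       beginning with a number are skipped, so comments are allowed
*     - a layer-size delimiter followed by the size of each layer
*     - optionally a weights delimiter followed by one line of weights
*       per layer
*     - a vectors delimiter followed by one vector per line: bias value,
*       input values and, except in classification mode, the target
*       outputs
*/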
/*
* Replace values with command line supplied values if supplied
*/
if (rateflag == TRUE)
learn_rate = usr_rate;
if (seedflag == TRUE)
seed = usr_seed;
if (accuflag == TRUE)
accuracy = usr_accuracy;
if (vecflag == TRUE)
vecs_correct = usr_vec_cor;
if (momflag ==TRUE)
mom_fac = usr_mom_fac;
if (gainflag == TRUE)
gain = usr_gain;
/*
* Allocate memory for layer size info.
*/
MAKE1D(netI.LayerSize,int,netI.NumLayers);
netI_g = &netI;
/*
* Read in delimiter and get layer size info. This determines the
* number of nodes in each layer.
*/
fscanf(stdin,"%s",text);
if(!strcmp(text,DELIMITER[2])) {
for ( i = 0; i < netI.NumLayers; i++ ) {
fscanf(stdin,"%d",&netI.LayerSize[i]);
fscanf(stdin,"\n");
} /* end for i */
} else {
fprintf(stderr,"\n No information on net - exiting\n");
exit(1);
} /* end if */
/*
* Allocate memory for Weights, Node etc
*/
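/*
* MAKE1D/MAKE2D/MAKE3D are presumably allocation macros from Neural.h;
* the LayerSize[i] expressions below are assumed to be evaluated inside
* the macros' own loop over i (see rev 1.22), which is why only the
* array elements actually needed get allocated.
*/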
MAKE3D(Weight,float,(netI.NumLayers-1),(netI.LayerSize[i]+1),(netI.LayerSize[i+1]+1));
Weight_g = Weight;
MAKE3D(Delta_weight,float,(netI.NumLayers-1),(netI.LayerSize[i]+1),(netI.LayerSize[i+1]+1));
Delta_weight_g = Delta_weight;
MAKE2D(vecS.InVec,float,MAX_VECS,(netI.LayerSize[0]+1));
MAKE2D(vecS.OutVec,float,MAX_VECS,(netI.LayerSize[netI.NumLayers-1]+1));
vecS_g = &vecS;
/*
* The Node array is a 2D float array, except that no memory is
* allocated for its first row; Node[0] is just a pointer which is
* later aimed at the current input vector.
*/
Node = (float **)malloc(netI.NumLayers*sizeof(float *));
for ( i = 1; i < netI.NumLayers; i++ )
Node[i] = (float *)malloc((netI.LayerSize[i]+1)*sizeof(float));
Node_g = Node;
/*
* Read in delimiter to see if a Weights section has been supplied
*/
weightsflag = FALSE;
fscanf(stdin,"%s",text);
if (strcmp(text,DELIMITER[1])) {
/*
* weightsflag records that a weights section was supplied, so the
* vectors delimiter still has to be read in later in the file.
*/
weightsflag = TRUE;
fprintf(stderr,"\n Using supplied Weights.\n");
/*
* Read in the Weights from config file. Each line in file is
* one layer.
*/
for ( i = 0; i < (netI.NumLayers-1); i++ ) {
for ( j = 0; j < (netI.LayerSize[i]+1); j++) {
for ( k = 1; k < (netI.LayerSize[i+1]+1); k++) {
fscanf(stdin,"%f",&Weight[i][j][k]);
} /* end for k */
} /* end for j */
} /* end for i */
} else {
/*
* If we are not in training mode warn user and exit.
*/
if (mode == TRUE) {
fprintf(stderr,"\n To classify vectors Weights MUST be supplied in config file\n");
NetExit(&netI,&vecS,Node,Weight,Delta_weight);
} /* end if */
/*
* Initialize Weights to small random values since there are no
* stored values.
*/
fprintf(stderr,"\n Initialising Weights using random numbers.\n");
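/*
* Starting from small random values rather than zeros breaks the
* symmetry between nodes: if every weight started out equal, all nodes
* in a layer would compute identical outputs and receive identical
* error signals, so back propagation could never differentiate them.
*/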
SEED_FUNC((long)(netI.LayerSize[0]*seed) ); /* Choose a seed */
for ( i = 0; i < (netI.NumLayers-1); i++ ) {
for ( j = 0; j < (netI.LayerSize[i]+1); j++) {
for ( k = 0; k < (netI.LayerSize[i+1]+1); k++) {
SEED_FUNC((long) (seed*RAND_FUNC()));
Weight[i][j][k] = RAND_FUNC();
} /* end for k */
} /* end for j */
} /* end for i : initialise Weights */
} /* endif #Vectors delimiter */
/*
* Read in the vectors.
* The first value is the bias node. This is followed by the inputs.
* Finally, at the end of the line are the expected outputs; these are
* not present in classification mode.
* The first vector's bias node is the most important since it is used
* for the rest of the network's biasing.
*/
if ( weightsflag ) fscanf(stdin,"%s",text);
status = 1;
vecS.NumVecs = 0;
do {
for (i=0; i < (netI.LayerSize[0]+1); i++) {
status = fscanf(stdin,"%f ",&vecS.InVec[vecS.NumVecs][i]);
} /* end for i */
if ( !mode ) {
for (i=0; i < netI.LayerSize[netI.NumLayers-1]; i++) {
status = fscanf(stdin,"%f ",&vecS.OutVec[vecS.NumVecs][i]);
} /* end for i */
} /* end if */
} while (status != EOF && status != 0 && ( ++vecS.NumVecs < MAX_VECS ) );
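/*
* For example, a training vector line for a net with 2 inputs and 1
* output would look like (bias first, then inputs, then the target):
*
*     1.0   0.0 1.0   1.0
*/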
/*
* Initialise the previous delta weights to 0 for the momentum factor.
*/
for ( i = 0; i < (netI.NumLayers-1); i++ ) {
for ( j = 0; j < (netI.LayerSize[i]+1); j++) {
for ( k = 0; k < (netI.LayerSize[i+1]+1); k++) {
Delta_weight[i][j][k] = 0.0;
} /* end for k */
} /* end for j */
} /* end for i */
/*
* Set up some code simplifying variables.
*/
output_layer = netI.NumLayers-1;
output_nodes = netI.LayerSize[output_layer];
/*==========================================================================*
SET UP FOR BIASES
*==========================================================================*/
/*
* Set up biases. Each layer has one bias node, index [layer][0]; this is
* set to the bias setting of the first vector.
*/
for ( i = 1; i < netI.NumLayers; i++ ) {
Node[i][0] = vecS.InVec[0][0];
} /* end for i */
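/*
* The bias node behaves like an extra input that is always "on" at the
* value taken from the first vector; the weights leading out of it are
* trained like any others, so each node effectively learns its own
* activation threshold.
*/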
/*
*+++
*/
if ( mode ) {
/*==========================================================================*
START MAIN CLASSIFICATION LOOP
*==========================================================================*/
/*
* This section uses the previously trained neural network to try to
* decide what the outputs for a given vector should be.
* It feeds each vector forwards through the network and then, for
* each output node, decides which binary state it should be in.
* This information is then written to the output config file after the
* inputs and also reported on the screen.
*/
fprintf( stderr, "\n Beginning classification\n" );
for ( cur_vec = 0; cur_vec < vecS.NumVecs; cur_vec++ ) {
/*
* Make the input nodes point at the current input vector.
*/
Node[0] = vecS.InVec[cur_vec];
/*
* Feed the data through the Neural Network
*/
NetFeedForward(Weight,Node,netI,gain);
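/*
* NetFeedForward() is assumed to propagate the input layer by layer,
* roughly
*
*     Node[l+1][k] = f( sum over j of Weight[l][j][k] * Node[l][j] )
*
* with f() the sigmoid transfer function and j running over the bias
* node and all nodes of layer l.
*/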
/*
* Loop for each output Node.
*/
for ( i = 0; i < output_nodes; i++ ) {
/*
* If an output node is 0.5 or greater then it is in the high state,
* otherwise it is in the low state.
*/
if ( Node[output_layer][i+1] >= 0.5 ) {
vecS.OutVec[cur_vec][i] = HIGH_STATE;
} else {
vecS.OutVec[cur_vec][i] = LOW_STATE;
} /* end if */
fprintf( stderr, " Vector %d Output %d is in state %.1f\n", cur_vec, i, vecS.OutVec[cur_vec][i] );
} /* end for i : loop over all output Nodes */
} /* end for cur_vec */
/*==========================================================================*
END MAIN CLASSIFICATION LOOP
*==========================================================================*/
/*
*+++
*/
} else {
/*==========================================================================*
START OF MAIN TRAINING LOOP
*==========================================================================*/
/*
* This code is the neural net training loop. In each loop the vectors
* have their order randomised. The vectors are then fed forwards in turn
* through the network and the output nodes checked against their targets. If
* the specified accuracy is met for the specified percentage of vectors
* then the loop finishes, otherwise the errors are back propagated and the
* loop begins again. If more than MAX_ITS iterations are carried out without
* training completing then the loop is ended in its current state.
*/
classflag = FALSE;
class_its = 0;
update = 0;
fprintf( stderr, "\n Beginning training\n\n" );
while ( !classflag ) {
/*
* Randomise the order of the vectors so that the largest possible area
* of the error surface is explored during convergence, hopefully
* avoiding local minima.
*/
num_correct = 0;
NetVecRand( &vecS );
/*
* Loop over each vector.
*/
for ( cur_vec = 0; cur_vec < vecS.NumVecs; cur_vec++ ) {
/*
* Make the current input vector the first Node row, then feed
* it through the network to get an output.
*/
Node[0] = vecS.InVec[cur_vec];
NetFeedForward(Weight,Node,netI,gain);
/*
* Check each output node for accuracy.
*/
for ( i = 0; i < output_nodes; i++ ) {
result=fabs(vecS.OutVec[cur_vec][i]-Node[output_layer][i+1] );
/*
* Print graph data if requested.
*/
if (graphflag)fprintf(stdout,"%d\t%f\n",class_its,result);
if ( result < accuracy ) {
accurateflag = TRUE;
} else {
accurateflag = FALSE;
} /* end if : accuracy check */
} /* end for i : accuracy check */
if ( accurateflag ) num_correct++;
/*
* Backpropagate the errors to improve the weights.
*/
NetBackProp(Weight,Delta_weight,Node,netI,vecS.OutVec[cur_vec],learn_rate,mom_fac,gain);
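/*
* NetBackProp() is assumed to apply the usual gradient descent update
* with momentum, roughly
*
*     Delta_weight = learn_rate * delta * node_output
*                    + mom_fac * previous Delta_weight
*     Weight      += Delta_weight
*
* where delta is the back propagated error term of the destination
* node; the momentum term smooths successive updates and helps the
* search roll through shallow local minima.
*/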
/*
* Reset bias nodes which have been altered during back prop.
*/
for ( i = 1; i < netI.NumLayers; i++ ) {
Node[i][0] = vecS.InVec[0][0];
} /* end for i */
} /* end for cur_vec : do 1 learning cycle */
/*
* Calculate the percentage of vectors classified correctly (num_correct).
*/
per_cor = ( num_correct * 100.0 ) / (float)(vecS.NumVecs);
if ( per_cor >= vecs_correct ) {
fprintf( stderr,"\n Training complete %3.1f %% of vectors classified correctly.\n", per_cor );
fprintf( stderr," training took %d iterations to complete\n\n",class_its);
classflag = TRUE;
} else {
classflag = FALSE;
} /* end if : check for classification */
/*
* Print a progress report every USER_UPDATE iterations. Also, for Amiga
* users, check whether there has been an abort request.
*/
if ( (++update) > USER_UPDATE ) {
fprintf( stderr,"Currently trained to %3.1f %% classification after %d iterations\r",per_cor, class_its );
fflush( stderr );
update = 0;
#ifdef _AMIGA
chkabort();
#endif
} /* end if */
/*
* Make sure that the Neural Network is converging to requested
* accuracy within a certain number of iterations.
*/
class_its++;
if ( class_its > MAX_ITS ) {
fprintf( stderr,"\n Can't classify to requested level\n" );
fprintf( stderr," Current classification accuracy is %3.1f %%\n", per_cor );
classflag = TRUE;
} /* end if : stop if too many iterations */
} /* end while : loop until classified */
} /* end if : train or classification mode */
/*===========================================================================*
END OF MAIN TRAINING LOOP
*===========================================================================*/
/*===========================================================================*
SAVE RESULTS AND CLEANUP
*===========================================================================*/
/*
* Write out config style to stdout if user has not chosen graph option.
*/
if (!graphflag) {
NetWriteConfig(netI,vecS,Weight,seed,learn_rate,mom_fac,accuracy,vecs_correct,gain);
} /* end if graph flag */
/*
* Free memory and exit.
*/
NetExit(&netI,&vecS,Node,Weight,Delta_weight);
} /* end program nnn */
/*==========================================================================*
END OF PROGRAM
*==========================================================================*/